In order to characterize the bursting dynamics of the links in the temporal network (see Notebook 4) the methods in the GDa.stats.bursting module can be used.
In particular, we do so for binary arrays of activations that, similar to spike-trains, are binary with the value one used to represent the activation of a certain link. For each activation sequence we characterize the following statistics: link avg. activation time (mu), total act. time relative to task stage time (mu_tot), CV (mean activation time over its std).
This procedure requires that we threshold the values of coherence in the temporal network. The thresholds are defined for each link (relative) or commonly for all links (absolute) based on an arbitrary quantile q of its coherence distribution. Since this thresholding procedure can affect the results we also study how the threshold level influences the statistics measured. It is also important to note that since we may compare different trial types (ODRT, int. fixation, blocked fixation) the threshold is computed commonly for all the trials.
Analysis of the burst dynamics of each link for each frequency band and stage of the ODRT.
# Adding GDa to path
import sys
raw_path = '/home/vinicius/storage1/projects/GrayData-Analysis'
sys.path.insert(1, raw_path)
import GDa.stats.bursting as bst
from GDa.stats.util import custom_mean, custom_std
#from GDa.graphics.plot_brain_sketch import *
from GDa.session import session
from GDa.temporal_network import temporal_network
from GDa.util import smooth
import seaborn as sns
import numpy as np
import xarray as xr
import matplotlib.pyplot as plt
import scipy.signal
import time
import os
import h5py
from xfrites.conn.conn_coh import conn_coherence_wav
from tqdm import tqdm
from sklearn.manifold import TSNE
from scipy import stats
# Bands names
band_names = [r'band 1', r'band 2', r'band 3', r'band 4', r'band 5']
# Task stages of the ODRT trial.
stages = ['baseline', 'cue', 'delay', 'match']
# File holding the coherence super-tensor consumed by temporal_network.
_COH_FILE = 'super_tensor.nc'
# Parameters to read the data
idx = 3  # index into the date list below (-> '150128' for lucy)
nses = 1  # recording session number
nmonkey = 0  # 0 -> 'lucy', 1 -> 'ethyl'
align_to = 'cue'  # align trials to cue onset
dirs = { 'rawdata':os.path.join(raw_path, 'GrayLab'),
'results':'Results/',
'monkey' :['lucy', 'ethyl'],
'session':'session01',
'date' :[['141014', '141015', '141205', '150128', '150211', '150304'], []] }
# Instantiating session
# evt_dt is the window (s) kept around the alignment event.
ses = session(raw_path = dirs['rawdata'], monkey = dirs['monkey'][nmonkey], date = dirs['date'][nmonkey][idx],
session = nses, slvr_msmod = False, align_to = align_to, evt_dt = [-0.65, 3.00])
# Load data
ses.read_from_mat()
# Defining parameters
# Frequency axis (Hz) for the spectral decomposition and the sampling rate
# read from the session attributes.
f_start, f_end, n_freqs, sfreq = .1, 80, 50, ses.data.attrs['fsample']
freqs = np.linspace(f_start, f_end, n_freqs, endpoint=True)
# Downsampling factor applied to the time axis.
delta = 15
# Spectral estimation mode: "morlet", "mt_1" or "mt_2".
mode = 'morlet'
# Frequency bands of interest (Hz); previously duplicated in both branches.
_FOI_BANDS = np.array([
    [0.1, 6.],
    [6., 14.],
    [14., 26.],
    [26., 42.],
    [42., 80.]
])
if mode in ["morlet", "mt_1"]:
    foi = _FOI_BANDS
    n_cycles = freqs / 2
    mt_bandwidth = None
    # Decimate right after the time-frequency decomposition.
    decim_at = 'tfd'
elif mode == "mt_2":
    # One multitaper estimate per band: center frequencies and bandwidths.
    freqs = _FOI_BANDS.mean(axis=1)
    W = np.ceil(_FOI_BANDS[:, 1] - _FOI_BANDS[:, 0])  # Bandwidth (Hz)
    foi = None
    n_cycles = np.array([3, 5, 9, 12, 16])
    mt_bandwidth = np.array([2, 4, 4.28, 5.647, 9.65])
    # Decimate only after the coherence is computed.
    decim_at = 'coh'
# Keyword arguments forwarded to conn_coherence_wav below.
kw = dict(
    freqs=freqs, times=ses.data.time, roi=ses.data.roi, foi=foi, n_jobs=-1,
    sfreq=sfreq, mode=mode, decim_at=decim_at, n_cycles=n_cycles, decim=delta,
    sm_times=300, sm_freqs=1, block_size=2
)
# compute the coherence
coh = conn_coherence_wav(ses.data.values.astype(np.float32), **kw)
# Save the coherence (super-tensor) and its metadata to HDF5.
path_st = os.path.join('data', dirs['monkey'][nmonkey], dirs['date'][nmonkey][idx],
                       'session01', 'super_tensor.h5')
# Make sure the output directory exists before writing.
os.makedirs(os.path.dirname(path_st), exist_ok=True)
if os.path.isfile(path_st):
    # Bug fix: use os.remove instead of shelling out `rm` via os.system.
    os.remove(path_st)
# Context manager guarantees the file is closed even if a write fails.
with h5py.File(path_st, 'w') as hf:
    hf.create_dataset('coherence', data=coh.transpose("roi", "trials", "freqs", "times"))
    hf.create_dataset('freqs', data=freqs)
    hf.create_dataset('roi', data=np.array(coh.roi.values, dtype='S'))
    hf.create_dataset('tarray', data=coh.times.values)
    hf.create_dataset('bands', data=foi)
    # Store the session attributes (trial timings, sampling rate, ...).
    for k in ses.data.attrs.keys():
        hf.create_dataset('info/' + k, data=ses.data.attrs[k])
# Bug fix: `attrs` was undefined here (NameError); the session attributes live
# in ses.data.attrs.
ses.data.attrs['t_cue_on'].shape
del coh
When instantiated, the temporal network object will load the recording info for the monkey, date, and session specified, as well as the super-tensor; the trial type and the behavioral response can also be specified.
The super-tensor has dimensions [Number of pairs, Number of frequency bands, Number of trials, Time]. For Lucy the frequency bands analysed are:
The temporal_network class can also be instantiated by setting threshold as True; this will automatically threshold the super-tensor (computed for all trial types independently of how the trial type parameter was set).
# Default threshold
kw = dict(q=None, keep_weights=False)
# Instantiating a temporal network object without thresholding the data
# (q=None keeps the raw coherence values in the super-tensor).
net = temporal_network(coh_file='super_tensor.nc', monkey=dirs['monkey'][nmonkey],
session=1, date='150128', trial_type=[1],
behavioral_response=[1], wt=(20,20), drop_trials_after=True,
relative=True, verbose=True, **kw)
# Coherence of the first 10 trials, concatenated in time, one panel per band.
plt.figure(figsize=(20, 30))
n_bands = len(band_names)
for band_idx, band_label in enumerate(band_names):
    plt.subplot(n_bands, 1, band_idx + 1)
    panel = net.super_tensor.isel(freqs=band_idx, trials=slice(0, 10))
    panel = panel.stack(observations=("trials", "times"))
    plt.imshow(panel, aspect='auto', cmap='viridis', origin='lower',
               extent=[0, 10 * len(net.time), 1, net.super_tensor.sizes['roi']],
               vmin=0, vmax=.8)
    plt.colorbar(extend='max')
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_label)
In the cells below we perform the burstiness analysis for the default thresholding parameter.
# Default threshold
q_thr = 0.8
## Default threshold
kw = dict(q=q_thr, keep_weights=False)
# Instantiating a temporal network object, thresholding the data at the q_thr
# quantile of each link's coherence distribution (keep_weights=False makes the
# super-tensor binary).
net = temporal_network(coh_file='super_tensor.nc', monkey=dirs['monkey'][nmonkey],
session=1, date='150128', trial_type=[1],
behavioral_response=[1], wt=(20,20), drop_trials_after=True,
relative=True, verbose=True, **kw)
Computing coherence thresholds
100%|██████████| 5/5 [00:53<00:00, 10.65s/it]
# Violin plot of the per-link coherence thresholds, one violin per band.
plt.figure(figsize=(10, 6))
sns.violinplot(data=net.coh_thr, color='orange')
plt.title('Distribution of links\' thresholds (q=0.8)', fontsize=15)
plt.ylabel('Thresholds', fontsize=15)
plt.xticks(range(len(band_names)), band_names, fontsize=15)
To use the Python package igraph to compute network theory quantities it is necessary to instantiate a Graph object; to do so we have to provide an adjacency matrix to the Graph method. Note that the super-tensor is the edge representation of the network, but we can convert it to an adjacency matrix by calling the method convert_to_adjacency in the class temporal_network. This method will create the variable A inside the object.
# Build the adjacency (node x node) representation; stored as net.A.
net.convert_to_adjacency()
# Per-link Euclidean distances between electrodes, stored in the attrs.
d_eu = net.super_tensor.attrs['d_eu']
Since the analysis is performed for each period of the experiment, we have to create masks to access values for a specific period (baseline, cue, delay or match).
# Boolean masks selecting the samples of each task stage (flattened over trials/times).
net.create_stage_masks(flatten=True)
Now let's take a look at the binary super-tensor by plotting its first 10 trials.
# Binary (thresholded) super-tensor for the first 10 trials, one panel per band.
plt.figure(figsize=(20, 30))
n_freq_panels = len(net.freqs)
for f_idx in range(n_freq_panels):
    plt.subplot(n_freq_panels, 1, f_idx + 1)
    panel = net.super_tensor[:, f_idx, :10, :].stack(observations=('trials', 'times'))
    plt.imshow(panel, aspect='auto', cmap='gray', origin='lower',
               extent=[0, 10 * len(net.time), 1, net.super_tensor.sizes['roi']],
               vmin=0, vmax=1)
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_names[f_idx])
We can also check the super-tensor averaged over trials, which gives an idea of the probability of a certain link being active at a certain moment of the trial.
# Trial-averaged activation probability, z-scored over time for each link.
plt.figure(figsize=(20, 30))
zscored = scipy.stats.zscore(net.super_tensor.mean(dim='trials'), axis=-1)
for f_idx in range(len(net.freqs)):
    plt.subplot(len(net.freqs), 1, f_idx + 1)
    plt.imshow(zscored[:, f_idx, :],
               aspect='auto', cmap='RdBu_r', origin='lower',
               extent=[net.time[0], net.time[-1], 1, net.super_tensor.sizes['roi']],
               vmin=-4, vmax=4)
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_names[f_idx])
# Windows (in seconds) within the delay period over which to average the super-tensor.
win_delay = [
    [1, 1.1],
    [1.1, 1.2],
    [1.2, 1.3],
    [1.3, 1.4],
    [1.4, 1.6]
]
avg_st = net.get_averaged_st(win_delay=win_delay)
plt.figure(figsize=(20, 30))
# Band shown in every panel (6-14 Hz band, labeled "alpha" in the titles).
band = 1
for i in range(len(avg_st)):
    plt.subplot(len(avg_st), 1, i + 1)
    # z-score over time of the window-averaged super-tensor for the chosen band.
    # (A dead pre-loop zscore of the full super-tensor was removed: it was
    # overwritten here before ever being used.)
    aux = (avg_st[i][:, band, :] - avg_st[i][:, band, :].mean(dim='times')) / avg_st[i][:, band, :].std(dim='times')
    plt.imshow(aux,
               aspect='auto', cmap='RdBu_r', origin='lower',
               extent=[net.time[0], net.time[-1], 1, net.super_tensor.sizes['roi']],
               )
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(f'alpha band, delay window = {win_delay[i]}')
plt.savefig('img/avg_st_delays.png', dpi=300)
Here we show the link-averaged time series of activations for each trial, its median, and its 5% and 95% quantiles.
plt.figure(figsize=(12, 25))
# Average activation sequences over links
mu_filtered_super_tensor = net.super_tensor.mean(dim='roi')
for i in range(len(net.freqs)):
    plt.subplot(len(net.freqs), 1, i + 1)
    # Every single-trial time series as thin blue lines.
    for t in range(net.super_tensor.shape[2]):
        plt.plot(net.time,
                 mu_filtered_super_tensor.isel(trials=t, freqs=i).values,
                 color='b', lw=.1)
    # Median (black) and the 5%/95% quantiles (red) across trials.
    plt.plot(net.time,
             mu_filtered_super_tensor.isel(freqs=i).median(dim='trials'),
             color='k', lw=3)
    plt.plot(net.time,
             mu_filtered_super_tensor.isel(freqs=i).quantile(q=5/100, dim='trials'),
             color='r', lw=3)
    plt.plot(net.time,
             mu_filtered_super_tensor.isel(freqs=i).quantile(q=95/100, dim='trials'),
             color='r', lw=3)
    plt.xlabel('Time [s]', fontsize=15)
    plt.ylabel(r'$\langle C_{ij} \rangle$', fontsize=15)
    plt.xlim([net.time[0], net.time[-1]])
    # Bug fix: the title previously interpolated the stale global `band`
    # (always 1), so every panel was labeled "band=1"; label each panel
    # with its own band instead.
    plt.title(band_names[i], fontsize=15)
plt.tight_layout()
We can also project each trial time series in the 2D space using T-SNE to better visualize how the trials are distributed. For this let's use 50 trials.
# Project each time point (a binary link-activation pattern) of the first 50
# trials into 2D with t-SNE (hamming metric, suited to binary vectors) and
# color the points by task stage.
plt.figure(figsize=(8,30))
for j in tqdm( range(len(net.freqs)) ):
aux = net.super_tensor.isel(freqs=j, trials=slice(0,50))
# Collapse (trials, times) into a single observations axis.
aux = aux.stack(observations=("trials","times"))
Y = TSNE(n_components=2,
metric='hamming',
perplexity=30.0,
square_distances=True,
n_jobs=40).fit_transform(aux.T)
plt.subplot(len(net.freqs), 1, j+1)
for i in range(len(stages)):
# s_mask flags the observations of each stage; keep only the first 50
# trials' worth of samples to match the t-SNE input.
plt.plot(Y[net.s_mask[stages[i]][:50*len(net.time)],0],
Y[net.s_mask[stages[i]][:50*len(net.time)],1],
'.', ms=2, label = stages[i])
plt.legend()
plt.title(f'band {j}', fontsize=15)
plt.ylabel(r'tsne$_{y}$', fontsize=15)
plt.xlabel(r'tsne$_{x}$', fontsize=15)
plt.tight_layout()
100%|██████████| 5/5 [03:57<00:00, 47.50s/it]
# Toy binary "spike-train" used to illustrate the burst-extraction routines.
x = np.array([0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1])
# Two complementary stage masks splitting the train at sample 12.
stage_one = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]).astype(bool)
mask = {'1': stage_one, '2': np.logical_not(stage_one)}
plt.figure(figsize=(12, 8))
spike_positions = np.arange(len(x))[x == 1]
plt.vlines(spike_positions, 0, 1, color='k', label='spike-train')
plt.plot(mask['1'], lw=3, label='Stage 1', color='blue')
plt.plot(mask['2'], lw=3, label='Stage 2', color='red')
plt.ylim(0, 1.01)
plt.legend()
plt.tight_layout()
print(f'burst lengths = {bst.find_activation_sequences(x, dt=None, pad=False, max_size=None)}')
burst lengths = [2 1 8 3]
# Burst lengths restricted to each stage mask; bursts crossing a mask edge are kept.
for idx, key in enumerate(mask):
print(f'Mask {idx}, burst lengths = {bst.masked_find_activation_sequences(x, mask[key], dt=None, drop_edges=False)}')
Mask 0, burst lengths = [2 1 3] Mask 1, burst lengths = [5 3]
# Same as above but bursts touching the mask boundary are discarded (drop_edges=True).
for idx, key in enumerate(mask):
print(f'Mask {idx}, burst lengths = {bst.masked_find_activation_sequences(x, mask[key], dt=None, drop_edges=True)}')
Mask 0, burst lengths = [2 1] Mask 1, burst lengths = [3]
Next we compute the three quantities of interest: the mean burst duration ($\mu$), the normalized total active time ($\mu_{\mathrm{tot}}$), and the coefficient of variation (CV). Below we briefly describe each of those measurements in an example scenario.
Consider the activation series for the edge $p$ composed by nodes $i$ and $j$ ($p=\{i,j\}$) and trial $T$: $A_{p}^{T}=\{00011100001100111111\}$.
the mean burst duration ($\mu$): In the example above $A_{p}^{T}$ has three activation bursts of sizes $3$, $2$, and $6$, therefore $\mu$ = mean(3,2,6) ~ 3.7 a.u. and standard deviation $\sigma_{\mu}$ = std(3,2,6) ~ 1.7 a.u.;
the normalized total active time ($\mu_{\mathrm{tot}}$): The total active time is given by: $len(A_{p}^{T})^{-1}\sum A_{p}^{T}$. If a specific stage $s$ of the experiment is analysed for all trials we consider the concatenated activation series $A_{p}(s)$; if $n(s)^T$ is the number of observations in stage $s$ at trial $T$ then: $\mu_{\mathrm{tot}} = (\sum_{T}n(s)^T)^{-1}\sum A_{p}(s)$;
Burstness CV: The burstness CV is computed from step one as: CV = $\sigma_{\mu}/\mu$.
# Redefining masks (per-trial masks, i.e. not flattened over trials/times).
net.create_stage_masks(flatten=False)
# Statistics per link: [mu, sigma_mu, mu_tot, CV] for each band and stage.
bs_stats = np.zeros((net.super_tensor.sizes['roi'], net.super_tensor.sizes['freqs'], len(stages), 4))
# Total number of samples of each stage (normalizes mu_tot).
n_samp = []
for stage in stages:
    n_samp += [net.get_number_of_samples(stage=stage, total=True)]
# (A duplicated create_stage_masks call that followed here was removed —
# the masks were already built above and nothing changed them in between.)
for i in tqdm(range(len(band_names))):
    bs_stats[:, i, ...] = bst.tensor_burstness_stats(net.super_tensor.isel(freqs=i).values, net.s_mask,
                                                     drop_edges=True, samples=n_samp,
                                                     dt=delta / net.super_tensor.attrs['fsample'],
                                                     n_jobs=-1)
0%| | 0/5 [00:00<?, ?it/s]
20%|██ | 1/5 [00:54<03:36, 54.12s/it]
40%|████ | 2/5 [01:39<02:27, 49.08s/it]
60%|██████ | 3/5 [02:25<01:34, 47.41s/it]
80%|████████ | 4/5 [03:10<00:46, 46.61s/it]
100%|██████████| 5/5 [03:56<00:00, 47.22s/it]
titles = ['Mean burst duration', 'STD of burst duration', 'Norm. total active time', 'CV']
# Distribution of each statistic across links, per band (rows) and statistic
# (columns), one KDE per stage. (An unused `bins` list was removed.)
plt.figure(figsize=(20, 20))
count = 1
for i in range(len(net.freqs)):
    for k in range(4):
        plt.subplot(5, 4, count)
        for j in range(len(stages)):
            sns.kdeplot(data=bs_stats[:, i, j, k], shade=True)
        if i == 0: plt.title(titles[k], fontsize=15)
        # Label only the first column of the 4-wide grid.
        if count in [1, 5, 9, 13, 17]: plt.ylabel(f'Band {i}', fontsize=15)
        plt.legend(['baseline', 'cue', 'delay', 'match'])
        count += 1
plt.savefig('img/stat_dists_q08.pdf', dpi=300)
# Stage-wise mean +/- std (across links) of each statistic, one line per band.
# (Unused `n_pairs`/`norm` locals were removed.)
plt.figure(figsize=(12, 8))
for k in range(4):
    plt.subplot(2, 2, k + 1)
    for j in range(len(net.freqs)):
        # One array of link values per stage: baseline, cue, delay, match.
        stage_vals = [bs_stats[:, j, s, k] for s in range(4)]
        plt.errorbar(range(4), [v.mean() for v in stage_vals],
                     [v.std() for v in stage_vals])
    plt.xticks(range(4), ['baseline', 'cue', 'delay', 'match'], fontsize=15)
    plt.title(titles[k], fontsize=15)
    if k == 0: plt.legend(band_names)
plt.tight_layout()
# Reshape the per-link statistics into node x node matrices.
# Derive the node count from the session ROIs instead of hard-coding 49
# (the yticks below label the axes with ses.data.roi.values).
n_nodes = len(ses.data.roi)
mu = np.zeros([n_nodes, n_nodes, len(net.freqs), len(stages)])
sig = np.zeros([n_nodes, n_nodes, len(net.freqs), len(stages)])
mu_tot = np.zeros([n_nodes, n_nodes, len(net.freqs), len(stages)])
CV = np.zeros([n_nodes, n_nodes, len(net.freqs), len(stages)])
# Vectorized scatter: each link's stats land at its (source, target) entry.
# Assumes sources/targets index ROIs in 0..n_nodes-1 and each pair is unique.
src = np.asarray(net.session_info['sources'])
tgt = np.asarray(net.session_info['targets'])
mu[src, tgt, :, :] = bs_stats[..., 0]
sig[src, tgt, :, :] = bs_stats[..., 1]
mu_tot[src, tgt, :, :] = bs_stats[..., 2]
CV[src, tgt, :, :] = bs_stats[..., 3]
# Node x node maps of the mean burst duration, per band (rows) and stage (cols).
plt.figure(figsize=(20, 20))
count = 1
for k in tqdm(range(len(net.freqs))):
    for i in range(len(stages)):
        plt.subplot(len(net.freqs), len(stages), count)
        # Symmetrize: stats were stored on one triangle only.
        sym = mu[:, :, k, i] + mu[:, :, k, i].T
        plt.imshow(sym, aspect='auto',
                   cmap='viridis', origin='lower',
                   vmin=0, vmax=.6)
        plt.colorbar(extend='max')
        if stages[i] == 'baseline':
            plt.yticks(range(49), ses.data.roi.values)
            plt.ylabel(band_names[k], fontsize=15)
        else:
            plt.yticks([])
        if k == 4:
            plt.xticks(range(49), ses.data.roi.values, rotation=90)
        else:
            plt.xticks([])
        if k == 0:
            plt.title(stages[i], fontsize=15)
        count += 1
plt.tight_layout()
100%|██████████| 5/5 [00:01<00:00, 4.96it/s]
# Node x node maps of the std of burst duration, per band (rows) and stage (columns).
plt.figure(figsize=(20,20))
count = 1
for k in tqdm( range(len(net.freqs)) ):
for i in range(len(stages)):
plt.subplot(len(net.freqs),len(stages),count)
# Symmetrize the (triangular) matrix.
aux = (sig[:,:,k,i]+sig[:,:,k,i].T)
# Color scale shared within a band: 10x the std over all stages.
plt.imshow(aux, aspect='auto',
cmap='viridis', origin='lower',
vmin=0, vmax=10*np.std(sig[:,:,k,:]+sig[:,:,k,:].transpose(1,0,2)))
plt.colorbar(extend='max')
if stages[i] == 'baseline': plt.yticks(range(49), ses.data.roi.values)
else: plt.yticks([])
if k == 4: plt.xticks(range(49), ses.data.roi.values, rotation=90)
else: plt.xticks([])
if k == 0: plt.title(stages[i], fontsize=15)
if stages[i] == 'baseline': plt.ylabel(band_names[k], fontsize=15)
count+=1
plt.tight_layout()
100%|██████████| 5/5 [00:01<00:00, 4.53it/s]
# Node x node maps of the normalized total active time, per band and stage.
plt.figure(figsize=(20,20))
count = 1
for k in tqdm( range(len(net.freqs)) ):
for i in range(len(stages)):
plt.subplot(len(net.freqs),len(stages),count)
# Symmetrize the (triangular) matrix.
aux = (mu_tot[:,:,k,i]+mu_tot[:,:,k,i].T)
plt.imshow(aux, aspect='auto', cmap='viridis', origin='lower', vmin=0, vmax=5*(np.std(mu_tot[:,:,k,:]+mu_tot[:,:,k,:].transpose(1,0,2))))
plt.colorbar(extend='max')
if stages[i] == 'baseline': plt.yticks(range(49), ses.data.roi.values)
else: plt.yticks([])
if k == 4: plt.xticks(range(49), ses.data.roi.values, rotation=90)
else: plt.xticks([])
if k == 0: plt.title(stages[i], fontsize=15)
if stages[i] == 'baseline': plt.ylabel(band_names[k], fontsize=15)
count+=1
plt.tight_layout()
100%|██████████| 5/5 [00:01<00:00, 4.91it/s]
# Node x node maps of the burstiness CV, per band (rows) and stage (columns).
plt.figure(figsize=(20,20))
count = 1
for k in tqdm( range(len(net.freqs)) ):
for i in range(len(stages)):
plt.subplot(len(net.freqs),len(stages),count)
# Symmetrize the (triangular) matrix.
aux = (CV[:,:,k,i]+CV[:,:,k,i].T)
# NOTE(review): the sixth power presumably sharpens the contrast of high-CV
# links — confirm this is intentional and not a leftover experiment.
plt.imshow(aux**6, aspect='auto', cmap='viridis', origin='lower', vmin=0, vmax=.25)
plt.colorbar(extend='max')
if stages[i] == 'baseline': plt.yticks(range(49), ses.data.roi.values)
else: plt.yticks([])
if k == 4: plt.xticks(range(49), ses.data.roi.values, rotation=90)
else: plt.xticks([])
if k == 0: plt.title(stages[i], fontsize=15)
if stages[i] == 'baseline': plt.ylabel(band_names[k], fontsize=15)
count+=1
plt.tight_layout()
100%|██████████| 5/5 [00:01<00:00, 4.55it/s]
# Sweep the threshold quantile q and recompute the burstiness statistics for
# each value, to assess how the threshold level influences the results.
q_list = np.arange(0.2, 1.1, 0.2)
# dims: (links, bands, stages, stat[mu, sigma, mu_tot, CV], q)
cv = np.zeros([net.super_tensor.shape[0], len(net.freqs), len(stages), 4, len(q_list)])
for j in tqdm(range(len(q_list))):
    kw = dict(q=q_list[j], keep_weights=False)
    # Re-instantiate the network thresholded at the current quantile.
    # NOTE(review): wt=(30,30) differs from the (20,20) used elsewhere in this
    # notebook — confirm that is intentional.
    net = temporal_network(coh_file='super_tensor.nc', monkey=dirs['monkey'][nmonkey],
                           session=1, date='150128', trial_type=[1],
                           behavioral_response=[1], wt=(30,30), drop_trials_after=True,
                           relative=True, verbose=False, **kw)
    # Creating mask for stages
    net.create_stage_masks(flatten=False)
    n_samp = []
    for stage in stages:
        n_samp += [net.get_number_of_samples(stage=stage, total=True)]
    for i in range(len(band_names)):
        # Consistency fix: pass .values like every other call to
        # tensor_burstness_stats in this notebook.
        cv[:, i, ..., j] = bst.tensor_burstness_stats(net.super_tensor.isel(freqs=i).values, net.s_mask,
                                                      drop_edges=True, samples=n_samp,
                                                      dt=delta / net.super_tensor.attrs['fsample'],
                                                      n_jobs=-1)
0%| | 0/5 [00:00<?, ?it/s]
20%|██ | 1/5 [07:48<31:13, 468.25s/it]
40%|████ | 2/5 [15:18<22:53, 457.95s/it]
60%|██████ | 3/5 [22:44<15:04, 452.13s/it]
80%|████████ | 4/5 [30:06<07:28, 448.15s/it]
/home/vinicius/storage1/projects/GrayData-Analysis/GDa/stats/util.py:15: RuntimeWarning: Mean of empty slice return np.nanmean(array, axis=axis) /home/vinicius/anaconda3/lib/python3.8/site-packages/numpy/lib/nanfunctions.py:1664: RuntimeWarning: Degrees of freedom <= 0 for slice. var = nanvar(a, axis=axis, dtype=dtype, out=out, ddof=ddof,
100%|██████████| 5/5 [37:22<00:00, 448.50s/it]
titles = ['Mean burst duration', 'STD mean burst dur.', 'Norm. total active time', 'CV']
# Median and 5%-95% inter-quantile spread (across links) of each statistic as a
# function of the threshold quantile q, per band (rows) and statistic (columns).
plt.figure(figsize=(20,30))
count = 1
for i in range(len(net.freqs)):
for k in range(4):
plt.subplot(5,4,count)
for s in range(len(stages)):
# cv dims: (links, bands, stages, stat, q)
v_median = np.median( cv[:,i,s,k,:],axis=0)
v_q05 = np.quantile(cv[:,i,s,k,:], 5/100, axis=0)
v_q95 = np.quantile(cv[:,i,s,k,:], 95/100, axis=0)
# Half the inter-quantile range, drawn as a shaded band around the median.
diq = (v_q95-v_q05)/2
plt.plot(q_list, v_median, label=stages[s])
plt.fill_between(q_list, v_median-diq, v_median+diq, alpha=0.2)
count +=1
# NOTE(review): q_list extends to 1.0 but the x axis is clipped at 0.8 — confirm intended.
plt.xlim([0.2,0.8])
if k == 0: plt.ylabel(f'Band {i}', fontsize=15)
if i == 0: plt.title(titles[k], fontsize=15)
if i < 4: plt.xticks([])
if i == 0 and k==0: plt.legend()
if i == 4: plt.xlabel('q', fontsize=15)
plt.tight_layout()
plt.savefig('img/q_dependece.pdf', dpi=300)
def plot_dists(q):
    """Recompute the burstiness statistics at threshold quantile *q* and plot
    the distribution of each statistic across links (rows: bands, columns:
    statistics, one KDE per stage).

    Parameters
    ----------
    q : float
        Quantile used to threshold the coherence values.
    """
    kw = dict(q=q, keep_weights=False)
    # Instantiate a temporal network thresholded at quantile q.
    net = temporal_network(coh_file=_COH_FILE, monkey=dirs['monkey'][nmonkey],
                           session=1, date='150128', trial_type=[1],
                           behavioral_response=[1], wt=(20,20), drop_trials_after=True,
                           relative=True, verbose=False, **kw)
    net.create_stage_masks(flatten=False)
    # Bug fix: recompute the per-stage sample counts for *this* network instead
    # of silently reusing the stale global `n_samp` left over from the q-sweep.
    n_samp = [net.get_number_of_samples(stage=stage, total=True) for stage in stages]
    # Burstiness analysis statistics: [mu, sigma_mu, mu_tot, CV] per band/stage.
    bs_stats = np.zeros((net.super_tensor.sizes['roi'], net.super_tensor.sizes['freqs'], len(stages), 4))
    for j in tqdm(range(len(net.freqs))):
        bs_stats[:, j, :, :] = bst.tensor_burstness_stats(net.super_tensor.isel(freqs=j).values, net.s_mask,
                                                          drop_edges=True, samples=n_samp,
                                                          dt=delta / net.super_tensor.attrs['fsample'],
                                                          n_jobs=-1)
    plt.figure(figsize=(20, 20))
    count = 1
    for i in range(len(net.freqs)):
        for k in range(4):
            plt.subplot(5, 4, count)
            for j in range(len(stages)):
                sns.kdeplot(data=bs_stats[:, i, j, k], shade=True)
            if i == 0: plt.title(titles[k], fontsize=15)
            # Bug fix: first column of a 4-wide grid is count % 4 == 1
            # (1, 5, 9, ...), matching the identical plot earlier in the
            # notebook; the previous [1, 4, 7, 10, 13] labeled wrong panels.
            if count in [1, 5, 9, 13, 17]: plt.ylabel(f'Band {i}', fontsize=15)
            plt.legend(['baseline', 'cue', 'delay', 'match'])
            count += 1
# Distributions for a permissive threshold (q=0.3).
plot_dists(0.3)
plt.savefig('img/stat_dists_q03.pdf', dpi=300)
0%| | 0/5 [00:00<?, ?it/s]
20%|██ | 1/5 [00:45<03:02, 45.53s/it]
40%|████ | 2/5 [01:32<02:18, 46.22s/it]
60%|██████ | 3/5 [02:19<01:33, 46.54s/it]
80%|████████ | 4/5 [03:05<00:46, 46.52s/it]
100%|██████████| 5/5 [03:52<00:00, 46.53s/it]
# Distributions for the default threshold (q=0.8).
plot_dists(0.8)
plt.savefig('img/stat_dists_q08.pdf', dpi=300)
0%| | 0/5 [00:00<?, ?it/s]
20%|██ | 1/5 [00:43<02:55, 43.90s/it]
40%|████ | 2/5 [01:28<02:13, 44.54s/it]
60%|██████ | 3/5 [02:13<01:29, 44.74s/it]
80%|████████ | 4/5 [02:58<00:44, 44.86s/it]
100%|██████████| 5/5 [03:43<00:00, 44.79s/it]
# 2D histograms of (mean burst duration, CV) per band; each panel shows the
# density difference between a task stage and the baseline stage.
plt.figure(figsize=(15,20))
count = 1
x_min, x_max = bs_stats[...,0].min(), bs_stats[...,0].max()
y_min, y_max = bs_stats[...,3].min(), bs_stats[...,3].max()
bins = [np.linspace(x_min,x_max,20),np.linspace(y_min, y_max,20)]
for j in tqdm( range(len(net.freqs)) ):
# Baseline histogram (stage index 0) used as the reference.
Hb, xb, yb = np.histogram2d(bs_stats[:,j,0,0], bs_stats[:,j,0,3],
bins=bins, density = True )
for i in range(1,len(stages)):
# Plotting top links
plt.subplot(len(net.freqs), len(stages)-1, count)
H, xb, yb = np.histogram2d(bs_stats[:,j,i,0], bs_stats[:,j,i,3],
bins=bins, density = True )
# x axis scaled by 1000: durations (dt-based) shown in milliseconds.
plt.imshow(H-Hb, aspect='auto', cmap='RdBu_r', origin='lower',
extent=[1000*xb[0],1000*xb[-1],yb[0],yb[-1]],
interpolation='gaussian', vmin=-1000, vmax=1000)
plt.colorbar(extend='both')
if j < 4 : plt.xticks([])
if i > 1 : plt.yticks([])
if j == 4: plt.xlabel('Mean burst dur. [ms]', fontsize=15)
if j == 0: plt.title(f'{stages[i]}-baseline', fontsize=15)
if i == 1: plt.ylabel('CV', fontsize=15)
count += 1
plt.tight_layout()
100%|██████████| 5/5 [00:00<00:00, 12.31it/s]
# Density difference (stage - baseline) of (electrode distance, CV) per band.
plt.figure(figsize=(15,20))
count = 1
x_min, x_max = d_eu.min(), d_eu.max()
y_min, y_max = bs_stats[...,3].min(), bs_stats[...,3].max()
bins = [np.linspace(x_min,x_max,20),np.linspace(y_min, y_max,20)]
for j in tqdm( range(len(net.freqs)) ):
# Baseline reference histogram (stage index 0).
Hb, xb, yb = np.histogram2d(d_eu, bs_stats[:,j,0,3],
bins=bins, density = True )
for i in range(1,len(stages)):
# Plotting top links
plt.subplot(len(net.freqs), len(stages)-1, count)
H, xb, yb = np.histogram2d(d_eu, bs_stats[:,j,i,3],
bins=bins, density = True )
plt.imshow(H-Hb, aspect='auto', cmap='RdBu_r', origin='lower',
extent=[xb[0],xb[-1],yb[0],yb[-1]],
interpolation='gaussian', vmin=-0.02, vmax=0.02)
plt.colorbar()
if j < 4 : plt.xticks([])
if i > 1 : plt.yticks([])
if j == 4: plt.xlabel('Euclidian distance', fontsize=15)
if j == 0: plt.title(f'{stages[i]}-baseline', fontsize=15)
if i == 1: plt.ylabel('CV', fontsize=15)
count += 1
plt.tight_layout()
100%|██████████| 5/5 [00:00<00:00, 8.20it/s]
# Joint distribution of (mean burst duration, CV) per band/stage; one figure per q.
x_min, x_max = cv[..., 0, :].min(), cv[..., 0, :].max()
y_min, y_max = cv[..., 3, :].min(), cv[..., 3, :].max()
bins = [np.linspace(x_min, x_max, 20), np.linspace(y_min, y_max, 20)]
for iq in tqdm(range(cv.shape[-1])):
    plt.figure(figsize=(15, 20))
    count = 1
    for j in range(len(net.freqs)):
        for i in range(0, len(stages)):
            # Plotting top links
            plt.subplot(len(net.freqs), len(stages), count)
            H, xb, yb = np.histogram2d(cv[:, j, i, 0, iq], cv[:, j, i, 3, iq],
                                       bins=bins, density=True)
            plt.imshow(H, aspect='auto', cmap='viridis', origin='lower',
                       extent=[1000 * xb[0], 1000 * xb[-1], yb[0], yb[-1]],
                       interpolation='gaussian', vmin=0, vmax=300)
            plt.colorbar(extend='max')
            if j < 4: plt.xticks([])
            # Bug fix: the stage loop starts at 0 here (unlike the baseline-
            # difference plots above), so hide y ticks for every column but
            # the first (i > 0, not i > 1).
            if i > 0: plt.yticks([])
            if j == 4: plt.xlabel('Mean burst dur. [ms]', fontsize=15)
            if j == 0: plt.title(f'{stages[i]}', fontsize=15)
            if i == 0: plt.ylabel('CV', fontsize=15)
            count += 1
    plt.suptitle(f'q = {q_list[iq]:.2f}', fontsize=20)
    plt.tight_layout()
    plt.savefig(f'img/cv_mut_{iq}.png')
    plt.close()
# Joint distribution of (fraction of active time, CV) per band/stage; one figure per q.
x_min, x_max = cv[..., 2, :].min(), cv[..., 2, :].max()
y_min, y_max = cv[..., 3, :].min(), cv[..., 3, :].max()
bins = [np.linspace(x_min, x_max, 20), np.linspace(y_min, y_max, 20)]
for iq in tqdm(range(cv.shape[-1])):
    plt.figure(figsize=(15, 20))
    count = 1
    # Consistency fix: every other loop in this notebook iterates net.freqs;
    # this one used net.bands.
    for j in range(len(net.freqs)):
        for i in range(0, len(stages)):
            # Plotting top links
            plt.subplot(len(net.freqs), len(stages), count)
            H, xb, yb = np.histogram2d(cv[:, j, i, 2, iq], cv[:, j, i, 3, iq],
                                       bins=bins, density=True)
            plt.imshow(H, aspect='auto', cmap='viridis', origin='lower',
                       extent=[xb[0], xb[-1], yb[0], yb[-1]],
                       interpolation='gaussian', vmin=0, vmax=300)
            plt.colorbar(extend='max')
            if j < 4: plt.xticks([])
            # Bug fix: the stage loop starts at 0, so hide y ticks for i > 0.
            if i > 0: plt.yticks([])
            if j == 4: plt.xlabel('Fraction of active time', fontsize=15)
            if j == 0: plt.title(f'{stages[i]}', fontsize=15)
            if i == 0: plt.ylabel('CV', fontsize=15)
            count += 1
    plt.suptitle(f'q = {q_list[iq]:.2f}', fontsize=20)
    plt.tight_layout()
    plt.savefig(f'img/cv_mutot_{iq}.png')
    plt.close()
100%|██████████| 8/8 [00:15<00:00, 1.88s/it]
# Joint distribution of (electrode distance, CV) per band/stage; one figure per q.
x_min, x_max = d_eu.min(), d_eu.max()
y_min, y_max = cv[..., 3, :].min(), cv[..., 3, :].max()
bins = [np.linspace(x_min, x_max, 20), np.linspace(y_min, y_max, 20)]
for iq in tqdm(range(cv.shape[-1])):
    plt.figure(figsize=(15, 20))
    count = 1
    # Consistency fix: iterate net.freqs like the rest of the notebook.
    for j in range(len(net.freqs)):
        for i in range(0, len(stages)):
            # Plotting top links
            plt.subplot(len(net.freqs), len(stages), count)
            H, xb, yb = np.histogram2d(d_eu, cv[:, j, i, 3, iq],
                                       bins=bins, density=True)
            plt.imshow(H, aspect='auto', cmap='viridis', origin='lower',
                       extent=[xb[0], xb[-1], yb[0], yb[-1]],
                       vmin=0, vmax=0.02)
            plt.colorbar(extend='max')
            if j < 4: plt.xticks([])
            # Bug fix: the stage loop starts at 0, so hide y ticks for i > 0.
            if i > 0: plt.yticks([])
            # Bug fix: wrap in $...$ so matplotlib renders the LaTeX instead
            # of printing the macro literally.
            if j == 4: plt.xlabel(r'$d_{\rm eu}$', fontsize=15)
            if j == 0: plt.title(f'{stages[i]}', fontsize=15)
            if i == 0: plt.ylabel('CV', fontsize=15)
            count += 1
    plt.suptitle(f'q = {q_list[iq]:.2f}', fontsize=20)
    plt.tight_layout()
    plt.savefig(f'img/cv_deu_{iq}.png')
    plt.close()
100%|██████████| 8/8 [00:14<00:00, 1.78s/it]
from GDa.net.layerwise import *
# Default threshold
kw = dict(q=None, keep_weights=False)
# Instantiating a temporal network object without thresholding the data
net = temporal_network(coh_file='super_tensor.nc', monkey=dirs['monkey'][nmonkey],
                       session=1, date='150128', trial_type=[1],
                       behavioral_response=[1], wt=(20,20), drop_trials_after=True,
                       relative=True, verbose=True, **kw)
# Pearson correlation between the coherence time courses of every pair of
# links, per band and task stage.
n_links = net.super_tensor.sizes['roi']
CCij = np.zeros([len(net.freqs), n_links, n_links, len(stages)])
for s in tqdm(range(len(stages))):
    aux = net.get_data_from(stage=stages[s], pad=False)
    for b in range(len(net.freqs)):
        CCij[b, :, :, s] = np.corrcoef(aux[:, b, :].values, rowvar=True)
# Zero the diagonal (self-correlations). Derived from the tensor instead of
# the previously hard-coded 1176.
CCij[:, range(n_links), range(n_links), :] = 0
100%|██████████| 4/4 [00:13<00:00, 3.48s/it]
# Link-link correlation matrices per band (rows) and stage (columns).
plt.figure(figsize=(20, 20))
panel = 1
for b in range(len(net.freqs)):
    # Shared color scale within a band: three times the band-wide mean.
    band_scale = CCij[b, ...].mean() * 3
    for i in range(len(stages)):
        plt.subplot(len(net.freqs), len(stages), panel)
        plt.imshow(CCij[b, ..., i], aspect='auto', cmap='RdBu_r', origin='lower',
                   vmin=-band_scale, vmax=band_scale)
        if b == 0: plt.title(stages[i], fontsize=15)
        if i == 0: plt.ylabel(band_names[b], fontsize=15)
        plt.colorbar(extend='both')
        panel += 1
plt.tight_layout()
# Sanity check of the correlation tensor shape: (bands, links, links, stages).
print(f'{CCij.shape=}')
CCij.shape=(5, 1176, 1176, 4)
# Louvain community detection on the link-link correlation matrices, per band.
p_mc, m_mc = [], []
for band in tqdm( range(len(net.freqs)) ):
# compute_network_partition comes from GDa.net.layerwise (star import above);
# returns the community labels (p) and modularity (m) per stage.
p, m = compute_network_partition(CCij[band,:,:,None,:], kw_louvain=dict(qtype='sta'), backend='brainconn',verbose=True)
p_mc += [p]
m_mc += [m]
p_mc = xr.concat(p_mc, dim='freqs')
m_mc = xr.concat(m_mc, dim='freqs')
0%| | 0/5 [00:00<?, ?it/s]
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers. [Parallel(n_jobs=1)]: Done 4 out of 4 | elapsed: 2.4s finished 20%|██ | 1/5 [00:02<00:09, 2.46s/it]
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers. [Parallel(n_jobs=1)]: Done 4 out of 4 | elapsed: 2.2s finished 40%|████ | 2/5 [00:04<00:06, 2.33s/it]
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers. [Parallel(n_jobs=1)]: Done 4 out of 4 | elapsed: 2.6s finished 60%|██████ | 3/5 [00:07<00:04, 2.48s/it]
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers. [Parallel(n_jobs=1)]: Done 4 out of 4 | elapsed: 2.2s finished 80%|████████ | 4/5 [00:09<00:02, 2.38s/it]
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers. [Parallel(n_jobs=1)]: Done 4 out of 4 | elapsed: 2.4s finished 100%|██████████| 5/5 [00:11<00:00, 2.39s/it]
# Correlation matrices with links reordered by their community label.
# (A dead pre-loop argsort was removed: it was recomputed inside the loop
# before ever being used.)
plt.figure(figsize=(20, 20))
count = 1
for b in range(len(net.freqs)):
    for i in range(len(stages)):
        plt.subplot(len(net.freqs), len(stages), count)
        # Sorting by module label
        idx = np.argsort(p_mc.isel(trials=0, times=i, freqs=b))
        A = CCij[b, ..., i]
        plt.imshow(A[np.ix_(idx, idx)], aspect='auto', cmap='RdBu_r', origin='lower',
                   vmin=-CCij[b, ...].mean() * 3, vmax=CCij[b, ...].mean() * 3)
        if b == 0: plt.title(stages[i], fontsize=15)
        if i == 0: plt.ylabel(band_names[b], fontsize=15)
        plt.colorbar(extend='both')
        count += 1
# Bug fix: lay out the figure before saving, so the saved file reflects
# tight_layout (previously savefig ran first).
plt.tight_layout()
plt.savefig('img/mc_surr.png', dpi=200)
# Sum of within-community correlations for each community, per band and stage.
plt.figure(figsize=(20,20))
count=1
for b in range(len(net.freqs)):
for i in range(len(stages)):
TrS = []
plt.subplot(len(net.freqs),len(stages),count)
# Community label of each link for this band/stage (first trial).
membership = p_mc.isel(freqs=b,trials=0,times=i).values
for c in np.unique(membership):
# Indices of the links belonging to community c.
idx = np.arange(CCij.shape[1],dtype=int)[membership==c]
A = CCij[b,...,i]
# Within-community sub-matrix.
A = A[idx,idx[:,None]]
TrS += [A.sum()]
# NOTE(review): assumes community labels are contiguous 0..max so that
# len(TrS) == membership.max()+1 — confirm for the Louvain backend used.
plt.plot(range(membership.max()+1), TrS)
plt.xlim(0, membership.max())
if b == 0: plt.title(stages[i], fontsize=15)
if i == 0: plt.ylabel(band_names[b], fontsize=15)
if b == len(net.freqs)-1: plt.xlabel('Community number', fontsize=15)
count +=1
# All possible band pairs
# Upper-triangular indices enumerate every unordered pair (band_1 < band_2).
i, j = np.triu_indices(5, k=1)
bp = np.array([i, j]).T
# Cross-band correlation tensor: (pair, roi, roi, stage)
n_roi = net.super_tensor.sizes['roi']
CCmulti = np.zeros([len(bp), n_roi, n_roi, len(stages)])
for s in tqdm(range(len(stages))):
    aux = net.get_data_from(stage=stages[s], pad=False)
    for b, (band_1, band_2) in enumerate(zip(i, j)):
        # corrcoef stacks the two bands' ROI rows; the lower-left block is
        # the cross-correlation between band_2 and band_1 time-series.
        CCmulti[b, ..., s] = np.corrcoef(
            aux[:, band_1, :], aux[:, band_2, :], rowvar=True
        )[n_roi:, :n_roi]
100%|██████████| 4/4 [00:40<00:00, 10.17s/it]
# One panel per band pair, showing the cross-band correlation of stage index 2.
plt.figure(figsize=(20, 8))
count = 1
for b in range(len(bp)):
    plt.subplot(2, 5, count)
    count += 1
    plt.imshow(CCmulti[b, ..., 2], aspect='auto', cmap='RdBu_r',
               origin='lower', vmin=-0.04, vmax=0.04)
    plt.colorbar(extend='both')
    plt.title(f'band {bp[b,0]}-band {bp[b,1]}', fontsize=15)
plt.tight_layout()
# Default threshold
kw = dict(q=q_thr, keep_weights=False)
# Instantiating a temporal network object without thresholding the data
# CONSISTENCY: use the _COH_FILE constant (== 'super_tensor.nc') instead of
# repeating the literal, matching how net_0 is constructed below.
net_fix = temporal_network(coh_file=_COH_FILE, monkey=dirs['monkey'][nmonkey],
                           session=1, date='150128', trial_type=[2],
                           behavioral_response=None, wt=(30,30), drop_trials_after=True,
                           relative=True, verbose=True, **kw)
Computing coherence thresholds
100%|██████████| 5/5 [00:47<00:00, 9.51s/it]
# Binary raster of link activations for the first 10 trials, one panel per band.
plt.figure(figsize=(20, 30))
for i in range(len(band_names)):
    plt.subplot(len(band_names), 1, i+1)
    # BUGFIX: the axis extents previously came from the unrelated `net`
    # object (copy-paste); they now describe the net_fix data being plotted.
    plt.imshow(net_fix.super_tensor[:, i, :10, :].stack(observations=('trials', 'times')),
               aspect='auto', cmap='gray', origin='lower',
               extent=[0, 10*len(net_fix.time), 1, net_fix.super_tensor.sizes['roi']],
               vmin=0, vmax=1)
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_names[i])
# Trial-averaged activation probability, z-scored over time, per link and band.
plt.figure(figsize=(20, 30))
aux = scipy.stats.zscore(net_fix.super_tensor.mean(dim='trials'), axis=-1)
for i in range(len(band_names)):
    plt.subplot(len(band_names), 1, i+1)
    # BUGFIX: extents previously referenced the unrelated `net` object
    # (copy-paste); use net_fix, whose data is being shown.
    plt.imshow(aux[:, i, :],
               aspect='auto', cmap='RdBu_r', origin='lower',
               extent=[net_fix.time[0], net_fix.time[-1], 1, net_fix.super_tensor.sizes['roi']],
               vmin=-4, vmax=4)
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_names[i])
# Redefining masks
# Per-stage boolean masks over (trials, times), kept unflattened here
net_fix.create_stage_masks(flatten=False)
# Burstness statistics per link: shape (links, bands, stages, 4);
# last axis = [mean burst dur., std of burst dur., norm. total active time, CV]
bs_stats_fix = np.zeros((net_fix.super_tensor.sizes['roi'],net_fix.super_tensor.sizes['freqs'],len(stages), 4))
# Total number of samples in each task stage (used to normalise active time)
n_samp = []
for stage in stages:
    n_samp += [net_fix.get_number_of_samples(stage=stage, total=True)]
# NOTE(review): masks were already created above — this second call looks like
# a redundant copy-paste unless get_number_of_samples mutates them; confirm.
net_fix.create_stage_masks(flatten=False)
for i in tqdm( range(len(band_names)) ):
    # Burstness stats of every link for band i (n_jobs=-1: all cores)
    bs_stats_fix[:,i,...] = bst.tensor_burstness_stats(net_fix.super_tensor.isel(freqs=i).values, net_fix.s_mask,
                                                       drop_edges=True, samples=n_samp,
                                                       dt=delta/net_fix.super_tensor.attrs['fsample'],
                                                       n_jobs=-1);
0%| | 0/5 [00:00<?, ?it/s]
20%|██ | 1/5 [00:13<00:52, 13.19s/it]
40%|████ | 2/5 [00:21<00:30, 10.13s/it]
60%|██████ | 3/5 [00:29<00:18, 9.10s/it]
80%|████████ | 4/5 [00:36<00:08, 8.61s/it]
100%|██████████| 5/5 [00:45<00:00, 9.03s/it]
# Compare the distribution of each burstness statistic between the fixation
# (FIX) and task (ODRT) datasets; rows = bands, columns = statistics.
titles = ['Mean burst duration', 'STD mean burst dur.','Norm. total active time', 'CV']
colors = ['b', 'r', 'g', 'm']
plt.figure(figsize=(20, 20))
count = 1
for i in range(len(net_fix.freqs)):
    for k in range(4):
        plt.subplot(5, 4, count)
        for j in [2]:  # stage index 2 == 'delay'
            sns.kdeplot(data=bs_stats_fix[:,i,j,k], shade=False, color=colors[j], label=f'{stages[j]}, FIX')
            sns.kdeplot(data=bs_stats[:,i,j,k], shade=False, ls='--', color=colors[j], label=f'{stages[j]}, ODRT')
        if i == 0:
            plt.title(titles[k], fontsize=15)
        # BUGFIX: label the first column of every row.  The previous test
        # `count in [1,5,8,11,14]` hit arbitrary panels of a 4-column grid
        # (first-column counts are 1, 5, 9, 13, 17).
        if k == 0:
            plt.ylabel(f'Band {i}', fontsize=15)
        plt.legend()
        count += 1
plt.tight_layout()
# Flattened (per-sample) stage masks are needed to colour t-SNE points by stage
net_fix.create_stage_masks(flatten=True)
plt.figure(figsize=(8,30))
for j in tqdm( range(len(net_fix.freqs)) ):
    # Only the first 50 trials, to keep the embedding tractable
    aux = net_fix.super_tensor.isel(freqs=j, trials=slice(0,50))
    # Flatten (trials, times) into a single "observations" axis
    aux = aux.stack(observations=("trials","times"))
    # 2-D t-SNE of the per-sample link-activation vectors; hamming metric
    # matches the binarised (0/1) coherence values.
    # NOTE(review): `square_distances` was removed in newer scikit-learn —
    # this call may require pinning the sklearn version.
    Y = TSNE(n_components=2,
             metric='hamming',
             perplexity=30.0,
             square_distances=True,
             n_jobs=40).fit_transform(aux.T)
    plt.subplot(len(net_fix.freqs), 1, j+1)
    for i in range(len(stages)):
        # Scatter the embedded samples belonging to stage i (first 50 trials)
        plt.plot(Y[net_fix.s_mask[stages[i]][:50*len(net_fix.time)],0],
                 Y[net_fix.s_mask[stages[i]][:50*len(net_fix.time)],1],
                 '.', ms=2, label = stages[i])
    plt.legend()
    plt.title(f'band {j}', fontsize=15)
    plt.ylabel(r'tsne$_{y}$', fontsize=15)
    plt.xlabel(r'tsne$_{x}$', fontsize=15)
plt.tight_layout()
100%|██████████| 5/5 [03:24<00:00, 40.97s/it]
# Thresholding parameters: relative quartile threshold, binarised weights.
kw = {"q": q_thr, "keep_weights": False}
# Temporal network for trial_type=[1] with behavioral_response=[0]
# (presumably the failed trials — they are labelled 'Failed' in later plots).
net_0 = temporal_network(
    coh_file=_COH_FILE,
    monkey=dirs['monkey'][nmonkey],
    session=1,
    date='150128',
    trial_type=[1],
    behavioral_response=[0],
    wt=(30, 30),
    drop_trials_after=True,
    relative=True,
    verbose=True,
    **kw,
)
Computing coherence thresholds
100%|██████████| 5/5 [00:49<00:00, 9.99s/it]
# Binary raster of link activations for all trials, one panel per band.
plt.figure(figsize=(20, 30))
for i in range(len(band_names)):
    plt.subplot(len(band_names), 1, i + 1)
    # Trials and times concatenated into one "observations" axis.
    raster = net_0.super_tensor[:, i, :, :].stack(observations=('trials', 'times'))
    # NOTE(review): the x-extent hard-codes 16 trials — confirm this matches
    # the actual trial count of net_0.
    plt.imshow(raster,
               aspect='auto', cmap='gray', origin='lower',
               extent=[0, 16 * len(net_0.time), 1, net_0.super_tensor.sizes['roi']],
               vmin=0, vmax=1)
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_names[i])
# Trial-averaged activation probability, z-scored over time, per link and band.
plt.figure(figsize=(20, 30))
aux = scipy.stats.zscore(net_0.super_tensor.mean(dim='trials'), axis=-1)
for i in range(len(band_names)):
    plt.subplot(len(band_names), 1, i+1)
    # BUGFIX: extents previously referenced the unrelated `net` object
    # (copy-paste); use net_0, whose data is being shown.
    plt.imshow(aux[:, i, :],
               aspect='auto', cmap='RdBu_r', origin='lower',
               extent=[net_0.time[0], net_0.time[-1], 1, net_0.super_tensor.sizes['roi']],
               vmin=-4, vmax=4)
    plt.colorbar()
    plt.ylabel('Pair Number')
    plt.xlabel('Time [a.u.]')
    plt.title(band_names[i])
# Redefining masks
# Per-stage boolean masks over (trials, times), kept unflattened here
net_0.create_stage_masks(flatten=False)
# Burstness statistics per link: shape (links, bands, stages, 4);
# last axis = [mean burst dur., std of burst dur., norm. total active time, CV]
bs_stats_0 = np.zeros((net_0.super_tensor.sizes['roi'], net_0.super_tensor.sizes['freqs'], len(stages), 4))
# Total number of samples in each task stage (used to normalise active time)
n_samp = []
for stage in stages:
    n_samp += [net_0.get_number_of_samples(stage=stage, total=True)]
# NOTE(review): masks were already created above — second call kept in case
# get_number_of_samples mutates them; confirm and drop if redundant.
net_0.create_stage_masks(flatten=False)
for i in tqdm(range(len(band_names))):
    bs_stats_0[:, i, ...] = bst.tensor_burstness_stats(
        net_0.super_tensor.isel(freqs=i).values, net_0.s_mask,
        drop_edges=True, samples=n_samp,
        # BUGFIX: dt previously divided by net_fix's sampling rate inside this
        # net_0 computation (copy-paste from the net_fix cell); the data
        # analysed here is net_0's, so use its fsample.
        dt=delta / net_0.super_tensor.attrs['fsample'],
        n_jobs=-1)
0%| | 0/5 [00:00<?, ?it/s]
/home/vinicius/storage1/projects/GrayData-Analysis/GDa/stats/util.py:15: RuntimeWarning: Mean of empty slice return np.nanmean(array, axis=axis) /home/vinicius/anaconda3/lib/python3.8/site-packages/numpy/lib/nanfunctions.py:1664: RuntimeWarning: Degrees of freedom <= 0 for slice. var = nanvar(a, axis=axis, dtype=dtype, out=out, ddof=ddof, 20%|██ | 1/5 [00:13<00:53, 13.46s/it]
40%|████ | 2/5 [00:19<00:27, 9.25s/it]
60%|██████ | 3/5 [00:26<00:16, 8.23s/it]
80%|████████ | 4/5 [00:32<00:07, 7.38s/it]
100%|██████████| 5/5 [00:39<00:00, 7.91s/it]
# Compare the distribution of each burstness statistic between failed trials
# and the task (ODRT) dataset; rows = bands, columns = statistics.
titles = ['Mean burst duration', 'STD mean burst dur.','Norm. total active time', 'CV']
colors = ['b', 'r', 'g', 'm']
plt.figure(figsize=(20, 20))
count = 1
for i in range(len(net_0.freqs)):
    for k in range(4):
        plt.subplot(5, 4, count)
        for j in [2]:  # stage index 2 == 'delay'
            sns.kdeplot(data=bs_stats_0[:,i,j,k], shade=False, color=colors[j], label=f'{stages[j]}, Failed')
            sns.kdeplot(data=bs_stats[:,i,j,k], shade=False, ls='--', color=colors[j], label=f'{stages[j]}, ODRT')
        if i == 0:
            plt.title(titles[k], fontsize=15)
        # BUGFIX: label the first column of every row.  The previous test
        # `count in [1,5,8,11,14]` hit arbitrary panels of a 4-column grid
        # (first-column counts are 1, 5, 9, 13, 17).
        if k == 0:
            plt.ylabel(f'Band {i}', fontsize=15)
        plt.legend()
        count += 1
plt.tight_layout()